import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt
from torchsummary import summary
from nmfd_gnn import NMFD_GNN
# Report whether CUDA is available, then pin all computation to GPU 0.
print (torch.cuda.is_available())
device = torch.device("cuda:0")
# Seed the RNGs in use so shuffling and weight init are reproducible.
# NOTE(review): numpy's RNG is not seeded here -- confirm nothing below
# depends on np.random.
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
# Uniform-draw function of the seeded `random` module; handed later to
# random.shuffle(..., random=r) so shuffling uses the seeded generator.
r = random.random
# (A stray bare `True` -- pasted notebook output of the print above -- was removed.)
#1.1: settings
M = 10 #number of time interval in a window
missing_ratio = 0.50  # fraction of sensor readings masked out as missing
# Data directory name encodes the window length and missing ratio, e.g. "m_10_missing_50".
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)
#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
# Loss weights: flow data loss, occupancy data loss, physics-consistency loss.
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1
batch_size_vt = 16 #batch size for evaluation and test
hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
"beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy}
# Model sizes: GNN layer widths and LSTM hidden dimension.
gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10 #column dimension of L1, L2
c_k = 5.5 #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.757, 4.996, -2.409, 1.638, 3.569]  # initial theta parameters for the model -- TODO confirm their meaning in NMFD_GNN
hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
"p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30  # early-stop patience: epochs without a new validation minimum
#1.3: set paths
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
# m_10_missing_50  <- pasted notebook output of print(file_name); commented out so the module parses
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot per-epoch training losses and save to <file_name>/train_loss.png.

    total_phy_flow_occ_loss: list of [total, physics, flow, occupancy] loss
    values, one entry per epoch.
    """
    plt.figure(figsize=(4,3), dpi=75)
    t_p_f_o_l = np.array(total_phy_flow_occ_loss)
    # Columns: 0 = total epoch loss, 1 = physics, 2 = flow, 3 = occupancy.
    e_loss, p_loss, f_loss, o_loss = t_p_f_o_l[:,0], t_p_f_o_l[:,1], t_p_f_o_l[:,2], t_p_f_o_l[:,3]
    x = range(len(e_loss))
    plt.plot(x, p_loss, linewidth=1, label = "phy loss")
    plt.plot(x, f_loss, linewidth=1, label = "flow loss")
    plt.plot(x, o_loss, linewidth=1, label = "occ loss")
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    # Called once per epoch: close the figure so open figures do not accumulate.
    plt.close()
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot per-epoch flow MAE on validation/test sets and save to
    <file_name>/flow_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    x = range(len(vali_f_mae))
    plt.plot(x, vali_f_mae, linewidth=1, label="Validate")
    plt.plot(x, test_f_mae, linewidth=1, label="Test")
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    # Called once per epoch: close the figure so open figures do not accumulate.
    plt.close()
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot per-epoch occupancy MAE on validation/test sets and save to
    <file_name>/occ_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    x = range(len(vali_o_mae))
    plt.plot(x, vali_o_mae, linewidth=1, label="Validate")
    plt.plot(x, test_o_mae, linewidth=1, label="Test")
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()
    # Called once per epoch: close the figure so open figures do not accumulate.
    plt.close()
def MAELoss(yhat, y):
    """Mean absolute error between two tensors, returned as a Python float.

    The original divided the absolute error by 1 (a no-op); dropped.
    """
    return float(torch.mean(torch.abs(yhat - y)))
def RMSELoss(yhat, y):
    """Root-mean-square error between two tensors as a Python float."""
    squared_error = (yhat - y) ** 2
    return float(torch.sqrt(squared_error.mean()))
def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):
    """Evaluate the model on flow/occupancy data in mini-batches.

    Returns (f_mae, f_rmse, o_mae, o_rmse) in de-normalized units, each
    combined across batches with batch-size weighting.
    """
    flow_std = f_o_mean_std[1]
    occ_std = f_o_mean_std[3]
    n = len(f)
    f_mae_list = []
    f_rmse_list = []
    o_mae_list = []
    o_rmse_list = []
    num_list = []
    for start in range(0, n, b_s_vt):
        end = min(start + b_s_vt, n)
        num_list.append(end - start)
        batch_f = f[start: end]
        batch_o = o[start: end]
        batch_f_mask = f_mask[start: end]
        batch_o_mask = o_mask[start: end]
        # The model reconstructs from the masked inputs; the two extra outputs
        # (q_hat, q_theta) are only used by the physics loss during training.
        f_hat, o_hat, _, _ = model.run(batch_f_mask, batch_o_mask)
        f_hat = f_hat.cpu()
        o_hat = o_hat.cpu()
        # Multiply by std so errors are reported in original units.
        f_mae_list.append(MAELoss(f_hat, batch_f) * flow_std)
        f_rmse_list.append(RMSELoss(f_hat, batch_f) * flow_std)
        o_mae_list.append(MAELoss(o_hat, batch_o) * occ_std)
        o_rmse_list.append(RMSELoss(o_hat, batch_o) * occ_std)
    # Size-weighted averages; RMSEs are combined via the mean of squares.
    f_mae = np.dot(f_mae_list, num_list) / n
    o_mae = np.dot(o_mae_list, num_list) / n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list) / n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list) / n)
    return f_mae, f_rmse, o_mae, o_rmse
def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    """Thin wrapper reordering flow/occupancy arguments into vali_test's
    (f, f_mask, o, o_mask) convention; returns its 4-tuple unchanged."""
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ):
    """Run one optimization pass over the training data.

    *_x are the masked model inputs, *_y the targets; flow_std_squ rescales
    the physics loss into the range of the normalized data losses.
    Returns (aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss).
    """
    #f: flow; o: occupancy
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))
    # NOTE(review): range(0, n-b_s, b_s) skips the trailing samples that do
    # not fill a whole batch (and can skip the last full batch) -- confirm
    # this truncation is intended.
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]
        opt.zero_grad()
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        p_loss = criterion(q_hat, q_theta).cpu() #physical loss
        # Divide by the squared flow std so the physics term is comparable to
        # the normalized data losses.
        p_loss = p_loss/flow_std_squ
        f_loss = criterion(y_f_hat.cpu(), y_f_batch) #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch) #data loss of occupancy
        # Weighted sum of the three loss components (weights from `hyper`).
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        # Progress report every 64 batches.
        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")
    # Tiny epsilon guards against division by zero when no batch was processed.
    n_loss = float(len(losses)+0.000001)
    aver_loss = sum(losses)/n_loss
    aver_p_loss = sum(p_losses)/n_loss
    aver_f_loss = sum(f_losses)/n_loss
    aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss
#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: per-epoch shuffle + train, evaluation on the
    validation and test splits, loss plots, a metrics JSON dump every epoch,
    and patience-based early stopping.

    Returns (total_phy_flow_occ_loss, model).
    """
    total_phy_flow_occ_loss = list()
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    vali_f, vali_o = vali["flow"], vali["occupancy"]
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device)
    test_f, test_o = test["flow"], test["occupancy"]
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device)
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    # Learning rate is decayed once, at epoch 150.
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    flow_std_squ = np.power(f_o_mean_std[1], 2)
    no_decrease = 0  # consecutive epochs without a new validation minimum
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        # Reshuffle training windows each epoch using the seeded RNG `r`.
        # NOTE(review): random.shuffle's `random` parameter was removed in
        # Python 3.11 -- this line breaks on 3.11+; confirm the target version.
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx]
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
        train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
        train_x_o.to(device), train_y_o, hyper, flow_std_squ)
        opt_scheduler.step()
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
        evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
        evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        # Refresh the diagnostic plots after every epoch.
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        # Overwrite the metrics file each epoch so progress survives a crash.
        performance = {"train": total_phy_flow_occ_loss,\
        "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
        "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile = open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        #early stop
        # Early stopping tracks the normalized validation MSE (flow + occ) and
        # counts consecutive epochs that fail to set a new minimum.
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            # First epoch: initialize the running minimum with a sentinel.
            min_until_now = 1000000.0
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model
        n_mse_flow_occ = n_mse_flow_occ + 1
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model
def tensorize(train_vali_test):
    """Convert the four flow/occupancy arrays of a data split into torch
    tensors, returned in a fresh dict with the same keys."""
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}
def normalize_flow_occ(tvt, f_o_mean_std): #tvt: train, vali, test
    """Z-normalize flow and occupancy (and their masked variants) in the
    split dict, using [f_mean, f_std, o_mean, o_std]; returns the same dict
    with the arrays replaced by normalized nested lists."""
    f_mean, f_std, o_mean, o_std = f_o_mean_std
    for key in ("flow_mask", "flow"):
        tvt[key] = ((np.array(tvt[key]) - f_mean) / f_std).tolist()
    for key in ("occupancy_mask", "occupancy"):
        tvt[key] = ((np.array(tvt[key]) - o_mean) / o_std).tolist()
    return tvt
def transform_distance(d_matrix):
    """Convert raw pairwise distances to Gaussian-kernel similarities,
    mutating d_matrix in place and returning it.

    Each entry d becomes exp(-10000 * d^2 / sigma^2), where sigma is the
    standard deviation of all entries; zero distance maps to 1.0.
    """
    sigma = np.std(d_matrix)
    sigma_square = sigma * sigma
    for row in d_matrix:
        for col, dist in enumerate(row):
            row[col] = np.exp(0.0 - 10000.0 * dist * dist / sigma_square)
    return d_matrix
def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load the train/vali/test splits, adjacency matrix and sensor metadata.

    Returns (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length):
    the splits as normalized tensor dicts, adj as a float tensor of
    Gaussian-kernel similarities on `device`, and sensor lengths ordered by
    each sensor's index.
    """
    # Use context managers so file handles are closed promptly (the original
    # json.load(open(...)) calls leaked every handle).
    with open(mean_std_path) as fh:
        mean_std = json.load(fh)
    f_o_mean_std = [mean_std["f_mean"], mean_std["f_std"],
                    mean_std["o_mean"], mean_std["o_std"]]
    with open(train_path) as fh:
        train = json.load(fh)
    with open(vali_path) as fh:
        vali = json.load(fh)
    with open(test_path) as fh:
        test = json.load(fh)
    with open(sensor_adj_path) as fh:
        adj = json.load(fh)["adj"]
    n_sensor = len(train["flow"][0])
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))
    adj = torch.tensor(transform_distance(adj), device=device).float()
    with open(sensor_id_path) as fh:
        df_sensor_id = json.load(fh)
    # df_sensor_id maps sensor id -> record where [0] is the sensor's order
    # index and [3] its length -- presumably; verify against the data prep step.
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length
#6.1 load the data
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)  # data-loading time in seconds
# 10.49973177909851  <- pasted notebook output (data-loading time); commented out so the module parses
# Report the number of windows per split and the flow/occupancy mean/std
# used for normalization.
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
# 2007 663 663 [240.33475289317576, 220.77174415099844, 0.13747677267259475, 0.19174061724441546]  <- pasted notebook output of the prints above
# Build the NMFD_GNN model on `device`; MSE serves as the criterion for both
# the data losses and the physics loss inside train_epoch.
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)
cri = nn.MSELoss()
#6.2: train the model
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# NOTE(review): the block below is a pasted notebook training log. As bare
# text it made this module unparseable (SyntaxError at import). It is
# preserved verbatim inside an inert string literal so the file parses.
_PASTED_TRAINING_LOG = """
# epochs 200 ----------------an epoch starts------------------- i_epoch: 0 # batch: 125 i_batch: 0.0 the loss for this batch: 1.959289 flow loss 0.92753893 occ loss 0.9138899 time for this batch 0.4802360534667969 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.59473425 flow loss 0.17845821 occ loss 0.29241276 time for this batch 0.25343966484069824 ---------------------------------- train loss for this epoch: 0.737723
time for this epoch 38.00006556510925 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 1 # batch: 125 i_batch: 0.0 the loss for this batch: 0.48059762 flow loss 0.16669507 occ loss 0.21912152 time for this batch 0.19287514686584473 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.5157336 flow loss 0.13809633 occ loss 0.23829739 time for this batch 0.2552754878997803 ---------------------------------- train loss for this epoch: 0.540192
time for this epoch 36.67076396942139 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 2 # batch: 125 i_batch: 0.0 the loss for this batch: 0.47966182 flow loss 0.14806265 occ loss 0.20590799 time for this batch 0.19881558418273926 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.5140161 flow loss 0.13528827 occ loss 0.23756789 time for this batch 0.2600061893463135 ---------------------------------- train loss for this epoch: 0.498417
time for this epoch 36.96955418586731 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 3 # batch: 125 i_batch: 0.0 the loss for this batch: 0.4728306 flow loss 0.12299024 occ loss 0.21988808 time for this batch 0.19420742988586426 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.58723855 flow loss 0.12599048 occ loss 0.2767636 time for this batch 0.23760199546813965 ---------------------------------- train loss for this epoch: 0.476829
time for this epoch 37.592716455459595 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 4 # batch: 125 i_batch: 0.0 the loss for this batch: 0.4706776 flow loss 0.11022968 occ loss 0.23111342 time for this batch 0.19683146476745605 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4324484 flow loss 0.11187602 occ loss 0.21123557 time for this batch 0.2661583423614502 ---------------------------------- train loss for this epoch: 0.463017
time for this epoch 38.27885055541992 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 5 # batch: 125 i_batch: 0.0 the loss for this batch: 0.416869 flow loss 0.102207914 occ loss 0.19225225 time for this batch 0.19225478172302246 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.64358157 flow loss 0.13315484 occ loss 0.31668878 time for this batch 0.25696682929992676 ---------------------------------- train loss for this epoch: 0.453746
time for this epoch 37.85625100135803 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 6 # batch: 125 i_batch: 0.0 the loss for this batch: 0.33180904 flow loss 0.094735734 occ loss 0.15165685 time for this batch 0.21249938011169434 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4602374 flow loss 0.12455013 occ loss 0.21816705 time for this batch 0.25021862983703613 ---------------------------------- train loss for this epoch: 0.446921
time for this epoch 37.852065086364746 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 7 # batch: 125 i_batch: 0.0 the loss for this batch: 0.52345604 flow loss 0.12549119 occ loss 0.2584214 time for this batch 0.2016158103942871 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3521286 flow loss 0.0905769 occ loss 0.15607712 time for this batch 0.26697230339050293 ---------------------------------- train loss for this epoch: 0.441621
time for this epoch 37.59002494812012 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 8 # batch: 125 i_batch: 0.0 the loss for this batch: 0.39308792 flow loss 0.108682245 occ loss 0.179894 time for this batch 0.21093010902404785 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2919426 flow loss 0.08452475 occ loss 0.13565709 time for this batch 0.24441909790039062 ---------------------------------- train loss for this epoch: 0.439042
time for this epoch 36.16402554512024 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 9 # batch: 125 i_batch: 0.0 the loss for this batch: 0.54956466 flow loss 0.119372636 occ loss 0.28348154 time for this batch 0.2553231716156006 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.46013403 flow loss 0.10121702 occ loss 0.22720988 time for this batch 0.2712819576263428 ---------------------------------- train loss for this epoch: 0.433369
time for this epoch 45.21177339553833 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 10 # batch: 125 i_batch: 0.0 the loss for this batch: 0.4187125 flow loss 0.09432088 occ loss 0.22096041 time for this batch 0.2601170539855957 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2643367 flow loss 0.06935189 occ loss 0.118046895 time for this batch 0.2931089401245117 ---------------------------------- train loss for this epoch: 0.429429
time for this epoch 46.646819829940796 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 11 # batch: 125 i_batch: 0.0 the loss for this batch: 0.3350133 flow loss 0.098184235 occ loss 0.14688812 time for this batch 0.2669343948364258 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.45446497 flow loss 0.097052634 occ loss 0.22411409 time for this batch 0.2885136604309082 ---------------------------------- train loss for this epoch: 0.427147
time for this epoch 45.65103030204773 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 12 # batch: 125 i_batch: 0.0 the loss for this batch: 0.33135796 flow loss 0.082647264 occ loss 0.15924092 time for this batch 0.26223230361938477 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4445315 flow loss 0.10487986 occ loss 0.20875265 time for this batch 0.29975390434265137 ---------------------------------- train loss for this epoch: 0.423688
time for this epoch 47.46991491317749 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 13 # batch: 125 i_batch: 0.0 the loss for this batch: 0.5714424 flow loss 0.11036717 occ loss 0.28938106 time for this batch 0.27007484436035156 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.37835303 flow loss 0.09338489 occ loss 0.17357412 time for this batch 0.2828230857849121 ---------------------------------- train loss for this epoch: 0.419905
time for this epoch 46.905152320861816 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 14 # batch: 125 i_batch: 0.0 the loss for this batch: 0.42740804 flow loss 0.083769955 occ loss 0.21176513 time for this batch 0.2709805965423584 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.5046829 flow loss 0.10940559 occ loss 0.24537106 time for this batch 0.29168128967285156 ---------------------------------- train loss for this epoch: 0.416588
time for this epoch 47.52523732185364 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 15 # batch: 125 i_batch: 0.0 the loss for this batch: 0.42384195 flow loss 0.10045713 occ loss 0.18276428 time for this batch 0.26361608505249023 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4878999 flow loss 0.09112033 occ loss 0.23374093 time for this batch 0.3021352291107178 ---------------------------------- train loss for this epoch: 0.417718
time for this epoch 47.66518020629883 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 16 # batch: 125 i_batch: 0.0 the loss for this batch: 0.39250967 flow loss 0.08170666 occ loss 0.19030109 time for this batch 0.26232481002807617 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3917481 flow loss 0.0891511 occ loss 0.18132964 time for this batch 0.28710198402404785 ---------------------------------- train loss for this epoch: 0.411625
time for this epoch 47.221314907073975 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 17 # batch: 125 i_batch: 0.0 the loss for this batch: 0.4230513 flow loss 0.08667917 occ loss 0.22745931 time for this batch 0.2687854766845703 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.34618765 flow loss 0.08897135 occ loss 0.14056604 time for this batch 0.2840456962585449 ---------------------------------- train loss for this epoch: 0.409476
time for this epoch 46.65033435821533 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 18 # batch: 125 i_batch: 0.0 the loss for this batch: 0.35983163 flow loss 0.07884595 occ loss 0.16964246 time for this batch 0.2624506950378418 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3364085 flow loss 0.08321754 occ loss 0.14889129 time for this batch 0.2702476978302002 ---------------------------------- train loss for this epoch: 0.409824
time for this epoch 47.45305109024048 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 19 # batch: 125 i_batch: 0.0 the loss for this batch: 0.46267447 flow loss 0.09937463 occ loss 0.21798404 time for this batch 0.2614743709564209 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.29497832 flow loss 0.06835397 occ loss 0.13864315 time for this batch 0.2990109920501709 ---------------------------------- train loss for this epoch: 0.407845
time for this epoch 47.504754304885864 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 20 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2915889 flow loss 0.068661 occ loss 0.13700469 time for this batch 0.2632486820220947 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2902813 flow loss 0.07092147 occ loss 0.12926641 time for this batch 0.28574609756469727 ---------------------------------- train loss for this epoch: 0.40546
time for this epoch 47.56872272491455 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 21 # batch: 125 i_batch: 0.0 the loss for this batch: 0.49788767 flow loss 0.120698765 occ loss 0.23262958 time for this batch 0.25696611404418945 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3943981 flow loss 0.08965141 occ loss 0.17682514 time for this batch 0.2933979034423828 ---------------------------------- train loss for this epoch: 0.403744
time for this epoch 47.80527949333191 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 22 # batch: 125 i_batch: 0.0 the loss for this batch: 0.44866592 flow loss 0.0998876 occ loss 0.22185567 time for this batch 0.2718372344970703 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.34530193 flow loss 0.07492229 occ loss 0.15192437 time for this batch 0.29611968994140625 ---------------------------------- train loss for this epoch: 0.40274
time for this epoch 47.01766610145569 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 23 # batch: 125 i_batch: 0.0 the loss for this batch: 0.418509 flow loss 0.102042794 occ loss 0.19274949 time for this batch 0.24262666702270508 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.31785348 flow loss 0.07883911 occ loss 0.13997953 time for this batch 0.2655925750732422 ---------------------------------- train loss for this epoch: 0.400665
time for this epoch 47.726945638656616 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 24 # batch: 125 i_batch: 0.0 the loss for this batch: 0.3645308 flow loss 0.07810803 occ loss 0.16517538 time for this batch 0.26447153091430664 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4077364 flow loss 0.08778158 occ loss 0.17455779 time for this batch 0.28083109855651855 ---------------------------------- train loss for this epoch: 0.398011
time for this epoch 46.70844745635986 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 25 # batch: 125 i_batch: 0.0 the loss for this batch: 0.44503194 flow loss 0.09083409 occ loss 0.21526867 time for this batch 0.2555885314941406 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.43556416 flow loss 0.091708556 occ loss 0.18888828 time for this batch 0.2947070598602295 ---------------------------------- train loss for this epoch: 0.398497
time for this epoch 47.3539662361145 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 26 # batch: 125 i_batch: 0.0 the loss for this batch: 0.412768 flow loss 0.08878282 occ loss 0.18720919 time for this batch 0.2628636360168457 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.31031597 flow loss 0.078314014 occ loss 0.1342267 time for this batch 0.28699183464050293 ---------------------------------- train loss for this epoch: 0.395069
time for this epoch 46.965550661087036 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 27 # batch: 125 i_batch: 0.0 the loss for this batch: 0.3067327 flow loss 0.07762797 occ loss 0.1351566 time for this batch 0.2565934658050537 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.32832903 flow loss 0.080268584 occ loss 0.14597502 time for this batch 0.2889223098754883 ---------------------------------- train loss for this epoch: 0.395264
time for this epoch 46.37427854537964 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 28 # batch: 125 i_batch: 0.0 the loss for this batch: 0.37634522 flow loss 0.08605921 occ loss 0.16860029 time for this batch 0.2674839496612549 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4284904 flow loss 0.10132648 occ loss 0.21117404 time for this batch 0.2874772548675537 ---------------------------------- train loss for this epoch: 0.392456
time for this epoch 46.98146724700928 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 29 # batch: 125 i_batch: 0.0 the loss for this batch: 0.4541524 flow loss 0.09205367 occ loss 0.23282531 time for this batch 0.25710344314575195 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4323855 flow loss 0.09292131 occ loss 0.20266278 time for this batch 0.2973668575286865 ---------------------------------- train loss for this epoch: 0.392751
time for this epoch 47.455841064453125 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 30 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27439845 flow loss 0.067245804 occ loss 0.122441836 time for this batch 0.26897597312927246 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.42500368 flow loss 0.09150826 occ loss 0.20604141 time for this batch 0.2961695194244385 ---------------------------------- train loss for this epoch: 0.391515
time for this epoch 47.301703214645386 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 31 # batch: 125 i_batch: 0.0 the loss for this batch: 0.28884265 flow loss 0.07207749 occ loss 0.12557624 time for this batch 0.26438283920288086 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.39898837 flow loss 0.08940457 occ loss 0.17881626 time for this batch 0.2889690399169922 ---------------------------------- train loss for this epoch: 0.389068
time for this epoch 47.51772880554199 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 32 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23367536 flow loss 0.06489787 occ loss 0.09600489 time for this batch 0.25600171089172363 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3911742 flow loss 0.080996245 occ loss 0.18815048 time for this batch 0.2231278419494629 ---------------------------------- train loss for this epoch: 0.389523
time for this epoch 47.594916582107544 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 33 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24039307 flow loss 0.056729022 occ loss 0.10622925 time for this batch 0.2486410140991211 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.4404741 flow loss 0.09700682 occ loss 0.19926481 time for this batch 0.2882075309753418 ---------------------------------- train loss for this epoch: 0.387316
time for this epoch 47.630085468292236 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 34 # batch: 125 i_batch: 0.0 the loss for this batch: 0.35311344 flow loss 0.07847873 occ loss 0.15382746 time for this batch 0.27048778533935547 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.29667008 flow loss 0.07045247 occ loss 0.12802109 time for this batch 0.2643246650695801 ---------------------------------- train loss for this epoch: 0.382171
time for this epoch 48.75939750671387 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 35 # batch: 125 i_batch: 0.0 the loss for this batch: 0.46338433 flow loss 0.111042395 occ loss 0.2024792 time for this batch 0.2714364528656006 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2915996 flow loss 0.07199733 occ loss 0.12921262 time for this batch 0.2854917049407959 ---------------------------------- train loss for this epoch: 0.376363
time for this epoch 47.250969648361206 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 36 # batch: 125 i_batch: 0.0 the loss for this batch: 0.39998263 flow loss 0.090210475 occ loss 0.19191778 time for this batch 0.26799535751342773 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.42443103 flow loss 0.091639824 occ loss 0.19844736 time for this batch 0.30698323249816895 ---------------------------------- train loss for this epoch: 0.366044
time for this epoch 47.482481479644775 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 37 # batch: 125 i_batch: 0.0 the loss for this batch: 0.43580437 flow loss 0.096318945 occ loss 0.23350309 time for this batch 0.26986002922058105 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.37136889 flow loss 0.09631095 occ loss 0.18298393 time for this batch 0.29846906661987305 ---------------------------------- train loss for this epoch: 0.348983
time for this epoch 47.231112480163574 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 38 # batch: 125 i_batch: 0.0 the loss for this batch: 0.296546 flow loss 0.07844511 occ loss 0.14820437 time for this batch 0.2792685031890869 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3505431 flow loss 0.08256437 occ loss 0.19833578 time for this batch 0.29732465744018555 ---------------------------------- train loss for this epoch: 0.325556
time for this epoch 47.39773225784302 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 39 # batch: 125 i_batch: 0.0 the loss for this batch: 0.3436501 flow loss 0.08452194 occ loss 0.20034897 time for this batch 0.2533986568450928 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3328058 flow loss 0.09064751 occ loss 0.18051228 time for this batch 0.3079185485839844 ---------------------------------- train loss for this epoch: 0.300443
time for this epoch 48.187466859817505 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 40 # batch: 125 i_batch: 0.0 the loss for this batch: 0.28739437 flow loss 0.07468592 occ loss 0.16782029 time for this batch 0.2794830799102783 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.28428668 flow loss 0.07583121 occ loss 0.18329133 time for this batch 0.28107285499572754 ---------------------------------- train loss for this epoch: 0.275882
time for this epoch 46.405744791030884 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 41 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23770827 flow loss 0.06914614 occ loss 0.14781004 time for this batch 0.248976469039917 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23045835 flow loss 0.06752442 occ loss 0.15258977 time for this batch 0.2850306034088135 ---------------------------------- train loss for this epoch: 0.257659
time for this epoch 47.07635474205017 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 42 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23718418 flow loss 0.06734822 occ loss 0.16217753 time for this batch 0.2717440128326416 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2755147 flow loss 0.08132602 occ loss 0.18819109 time for this batch 0.29810357093811035 ---------------------------------- train loss for this epoch: 0.247737
time for this epoch 46.57313895225525 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 43 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24539429 flow loss 0.0675629 occ loss 0.17462945 time for this batch 0.25757503509521484 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15256616 flow loss 0.048421364 occ loss 0.10329175 time for this batch 0.2745940685272217 ---------------------------------- train loss for this epoch: 0.243441
time for this epoch 46.3897705078125 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 44 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2314665 flow loss 0.06307741 occ loss 0.16744214 time for this batch 0.2849574089050293 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2269302 flow loss 0.061634675 occ loss 0.16420202 time for this batch 0.2658979892730713 ---------------------------------- train loss for this epoch: 0.241202
"""
time for this epoch 48.51225018501282 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 45 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21483414 flow loss 0.064058706 occ loss 0.14984004 time for this batch 0.2726607322692871 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27593678 flow loss 0.074824624 occ loss 0.20031178 time for this batch 0.27135443687438965 ---------------------------------- train loss for this epoch: 0.240802
time for this epoch 47.41563558578491 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 46 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20039739 flow loss 0.06356265 occ loss 0.1363557 time for this batch 0.3732285499572754 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2798099 flow loss 0.07495467 occ loss 0.20430468 time for this batch 0.2993950843811035 ---------------------------------- train loss for this epoch: 0.239247
time for this epoch 48.37431359291077 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 47 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25683767 flow loss 0.078935556 occ loss 0.17718922 time for this batch 0.2692525386810303 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23964356 flow loss 0.06743676 occ loss 0.17182566 time for this batch 0.2926509380340576 ---------------------------------- train loss for this epoch: 0.240788
time for this epoch 48.26279950141907 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 48 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26828057 flow loss 0.07082041 occ loss 0.19688798 time for this batch 0.2819225788116455 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22456808 flow loss 0.06732326 occ loss 0.15656507 time for this batch 0.3030202388763428 ---------------------------------- train loss for this epoch: 0.239062
time for this epoch 47.69763469696045 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 49 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26636764 flow loss 0.068363175 occ loss 0.19763097 time for this batch 0.2792949676513672 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.30866557 flow loss 0.07485457 occ loss 0.2330326 time for this batch 0.29741358757019043 ---------------------------------- train loss for this epoch: 0.237271
time for this epoch 47.349812746047974 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 50 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22324823 flow loss 0.069190346 occ loss 0.15355076 time for this batch 0.27197265625 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16664316 flow loss 0.04995643 occ loss 0.116331205 time for this batch 0.2998771667480469 ---------------------------------- train loss for this epoch: 0.237935
time for this epoch 48.200422525405884 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 51 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2727035 flow loss 0.07080074 occ loss 0.20134506 time for this batch 0.27256226539611816 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1713831 flow loss 0.04863291 occ loss 0.121698916 time for this batch 0.30940747261047363 ---------------------------------- train loss for this epoch: 0.23868
time for this epoch 47.09681558609009 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 52 # batch: 125 i_batch: 0.0 the loss for this batch: 0.13941409 flow loss 0.04215867 occ loss 0.096497305 time for this batch 0.2797548770904541 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.29541177 flow loss 0.07510733 occ loss 0.21970639 time for this batch 0.2912898063659668 ---------------------------------- train loss for this epoch: 0.237047
time for this epoch 47.35651183128357 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 53 # batch: 125 i_batch: 0.0 the loss for this batch: 0.35284144 flow loss 0.08475338 occ loss 0.26748154 time for this batch 0.27117085456848145 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2690772 flow loss 0.069045715 occ loss 0.19943127 time for this batch 0.30091047286987305 ---------------------------------- train loss for this epoch: 0.235417
time for this epoch 48.28002858161926 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 54 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25620937 flow loss 0.067624174 occ loss 0.18801177 time for this batch 0.2547481060028076 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.29283845 flow loss 0.0787865 occ loss 0.2133601 time for this batch 0.30998849868774414 ---------------------------------- train loss for this epoch: 0.236756
time for this epoch 48.18000411987305 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 55 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24645151 flow loss 0.070007496 occ loss 0.17593475 time for this batch 0.28601861000061035 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23015429 flow loss 0.06356804 occ loss 0.16587488 time for this batch 0.28065013885498047 ---------------------------------- train loss for this epoch: 0.235598
time for this epoch 47.7903048992157 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 56 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2139706 flow loss 0.0602513 occ loss 0.15331014 time for this batch 0.2564702033996582 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22260019 flow loss 0.062479157 occ loss 0.15969862 time for this batch 0.30275583267211914 ---------------------------------- train loss for this epoch: 0.235903
time for this epoch 47.84366178512573 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 57 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23606679 flow loss 0.06772849 occ loss 0.16757397 time for this batch 0.25382399559020996 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.26340023 flow loss 0.070670836 occ loss 0.1922354 time for this batch 0.2918412685394287 ---------------------------------- train loss for this epoch: 0.23494
time for this epoch 47.196444034576416 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 58 # batch: 125 i_batch: 0.0 the loss for this batch: 0.29792956 flow loss 0.0783417 occ loss 0.21901394 time for this batch 0.2775444984436035 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21439281 flow loss 0.05808028 occ loss 0.15543032 time for this batch 0.2991909980773926 ---------------------------------- train loss for this epoch: 0.234092
time for this epoch 47.8333044052124 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 59 # batch: 125 i_batch: 0.0 the loss for this batch: 0.1880255 flow loss 0.057341736 occ loss 0.13022833 time for this batch 0.25816822052001953 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22843714 flow loss 0.062383436 occ loss 0.16560824 time for this batch 0.29415154457092285 ---------------------------------- train loss for this epoch: 0.234417
time for this epoch 47.700819969177246 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 60 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21496017 flow loss 0.065260336 occ loss 0.14898553 time for this batch 0.2677130699157715 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2359712 flow loss 0.06138767 occ loss 0.17422217 time for this batch 0.30955982208251953 ---------------------------------- train loss for this epoch: 0.233796
time for this epoch 47.64929723739624 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 61 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24167429 flow loss 0.067171924 occ loss 0.17397927 time for this batch 0.2646932601928711 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18281852 flow loss 0.05205905 occ loss 0.13042518 time for this batch 0.2927844524383545 ---------------------------------- train loss for this epoch: 0.235332
time for this epoch 47.76959300041199 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 62 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19509473 flow loss 0.05634137 occ loss 0.1380814 time for this batch 0.2852442264556885 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.28213617 flow loss 0.07716885 occ loss 0.20460531 time for this batch 0.3052821159362793 ---------------------------------- train loss for this epoch: 0.232735
time for this epoch 48.84825921058655 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 63 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23396996 flow loss 0.061190832 occ loss 0.17203721 time for this batch 0.25554752349853516 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2360678 flow loss 0.07021848 occ loss 0.16518255 time for this batch 0.26607370376586914 ---------------------------------- train loss for this epoch: 0.234075
time for this epoch 47.29732322692871 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 64 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25965583 flow loss 0.06890891 occ loss 0.19027618 time for this batch 0.24835205078125 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3050441 flow loss 0.07413155 occ loss 0.23028027 time for this batch 0.2926933765411377 ---------------------------------- train loss for this epoch: 0.232585
time for this epoch 47.031018018722534 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 65 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24079485 flow loss 0.063501336 occ loss 0.17678735 time for this batch 0.2697944641113281 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.30485502 flow loss 0.07766143 occ loss 0.2267104 time for this batch 0.2855370044708252 ---------------------------------- train loss for this epoch: 0.232194
time for this epoch 47.57653856277466 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 66 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17371188 flow loss 0.048919145 occ loss 0.124524325 time for this batch 0.2618741989135742 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2249752 flow loss 0.071370274 occ loss 0.1532133 time for this batch 0.29657721519470215 ---------------------------------- train loss for this epoch: 0.23117
time for this epoch 47.13095951080322 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 67 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27005395 flow loss 0.07234163 occ loss 0.19702254 time for this batch 0.27483582496643066 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21530893 flow loss 0.060387347 occ loss 0.15446617 time for this batch 0.2981557846069336 ---------------------------------- train loss for this epoch: 0.23207
time for this epoch 46.9542191028595 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 68 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18100055 flow loss 0.051907092 occ loss 0.12857153 time for this batch 0.25880885124206543 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2605554 flow loss 0.06787059 occ loss 0.19211207 time for this batch 0.29389500617980957 ---------------------------------- train loss for this epoch: 0.231281
time for this epoch 47.23220944404602 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 69 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18186025 flow loss 0.058224205 occ loss 0.123344466 time for this batch 0.26398396492004395 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16653484 flow loss 0.04572148 occ loss 0.120395884 time for this batch 0.2904846668243408 ---------------------------------- train loss for this epoch: 0.231279
time for this epoch 46.98932671546936 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 70 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25979966 flow loss 0.07533515 occ loss 0.18389452 time for this batch 0.2309737205505371 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22767152 flow loss 0.06324739 occ loss 0.1638423 time for this batch 0.2896761894226074 ---------------------------------- train loss for this epoch: 0.231288
time for this epoch 47.460524797439575 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 71 # batch: 125 i_batch: 0.0 the loss for this batch: 0.28091475 flow loss 0.07218766 occ loss 0.20804696 time for this batch 0.27445483207702637 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25636625 flow loss 0.0704892 occ loss 0.18506435 time for this batch 0.3061103820800781 ---------------------------------- train loss for this epoch: 0.230683
time for this epoch 47.738752126693726 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 72 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25412273 flow loss 0.067727044 occ loss 0.18588501 time for this batch 0.25240373611450195 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23571154 flow loss 0.06349738 occ loss 0.17167677 time for this batch 0.28758931159973145 ---------------------------------- train loss for this epoch: 0.230394
time for this epoch 46.38420510292053 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 73 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19833606 flow loss 0.058276743 occ loss 0.13966985 time for this batch 0.26000070571899414 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25358826 flow loss 0.07224694 occ loss 0.18087032 time for this batch 0.28642845153808594 ---------------------------------- train loss for this epoch: 0.228942
time for this epoch 47.2062132358551 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 74 # batch: 125 i_batch: 0.0 the loss for this batch: 0.32707146 flow loss 0.08198268 occ loss 0.24460237 time for this batch 0.2936568260192871 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2188846 flow loss 0.06252908 occ loss 0.1559202 time for this batch 0.29698920249938965 ---------------------------------- train loss for this epoch: 0.230326
time for this epoch 46.67064547538757 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 75 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20026442 flow loss 0.056559317 occ loss 0.14338417 time for this batch 0.3498563766479492 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27336994 flow loss 0.07181041 occ loss 0.20080796 time for this batch 0.2871274948120117 ---------------------------------- train loss for this epoch: 0.22978
time for this epoch 47.31728219985962 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 76 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23200454 flow loss 0.05780559 occ loss 0.17359763 time for this batch 0.24608087539672852 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16667335 flow loss 0.046726134 occ loss 0.119713694 time for this batch 0.30629634857177734 ---------------------------------- train loss for this epoch: 0.228306
time for this epoch 47.5741229057312 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 77 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21628083 flow loss 0.060508426 occ loss 0.15505117 time for this batch 0.27470850944519043 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22555062 flow loss 0.064694345 occ loss 0.16044168 time for this batch 0.2881901264190674 ---------------------------------- train loss for this epoch: 0.229623
time for this epoch 46.915695667266846 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 78 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27834538 flow loss 0.07392144 occ loss 0.20372245 time for this batch 0.25233030319213867 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20504577 flow loss 0.058949284 occ loss 0.14560613 time for this batch 0.30225038528442383 ---------------------------------- train loss for this epoch: 0.227659
time for this epoch 46.90415620803833 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 79 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2505478 flow loss 0.06557221 occ loss 0.18446475 time for this batch 0.2692556381225586 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.26653948 flow loss 0.0730037 occ loss 0.19294351 time for this batch 0.2814652919769287 ---------------------------------- train loss for this epoch: 0.228557
time for this epoch 47.55026125907898 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 80 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2493121 flow loss 0.06419127 occ loss 0.18470189 time for this batch 0.27361345291137695 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19995093 flow loss 0.055225525 occ loss 0.1443886 time for this batch 0.28985142707824707 ---------------------------------- train loss for this epoch: 0.228305
time for this epoch 47.22338628768921 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 81 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23140326 flow loss 0.06325287 occ loss 0.16755438 time for this batch 0.2597935199737549 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23868135 flow loss 0.06451139 occ loss 0.173833 time for this batch 0.30531859397888184 ---------------------------------- train loss for this epoch: 0.228037
time for this epoch 47.36562967300415 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 82 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23604919 flow loss 0.06889221 occ loss 0.1662976 time for this batch 0.2612295150756836 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19545165 flow loss 0.054126956 occ loss 0.14066163 time for this batch 0.2794487476348877 ---------------------------------- train loss for this epoch: 0.228242
time for this epoch 46.55345892906189 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 83 # batch: 125 i_batch: 0.0 the loss for this batch: 0.28111106 flow loss 0.072440974 occ loss 0.20810975 time for this batch 0.2687239646911621 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27600148 flow loss 0.07044809 occ loss 0.20444429 time for this batch 0.2986612319946289 ---------------------------------- train loss for this epoch: 0.226867
time for this epoch 47.11930966377258 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 84 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21470536 flow loss 0.056261625 occ loss 0.15780936 time for this batch 0.2536649703979492 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24469492 flow loss 0.0689421 occ loss 0.17518108 time for this batch 0.2734518051147461 ---------------------------------- train loss for this epoch: 0.225967
time for this epoch 47.21006989479065 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 85 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26767802 flow loss 0.078819305 occ loss 0.18792738 time for this batch 0.2561836242675781 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16610686 flow loss 0.051262695 occ loss 0.11421365 time for this batch 0.2981531620025635 ---------------------------------- train loss for this epoch: 0.227459
time for this epoch 47.55588746070862 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 86 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19356786 flow loss 0.057555374 occ loss 0.13554105 time for this batch 0.2684750556945801 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2145754 flow loss 0.064814776 occ loss 0.14929502 time for this batch 0.2997293472290039 ---------------------------------- train loss for this epoch: 0.226646
time for this epoch 47.334171295166016 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 87 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18599808 flow loss 0.054103717 occ loss 0.13150391 time for this batch 0.2707970142364502 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2681622 flow loss 0.067158625 occ loss 0.20059426 time for this batch 0.29264235496520996 ---------------------------------- train loss for this epoch: 0.225464
time for this epoch 47.42380928993225 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 88 # batch: 125 i_batch: 0.0 the loss for this batch: 0.1909822 flow loss 0.05608552 occ loss 0.13424157 time for this batch 0.2670705318450928 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23145758 flow loss 0.06371058 occ loss 0.16745135 time for this batch 0.2684352397918701 ---------------------------------- train loss for this epoch: 0.226172
time for this epoch 47.32651114463806 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 89 # batch: 125 i_batch: 0.0 the loss for this batch: 0.15951857 flow loss 0.04690537 occ loss 0.11226001 time for this batch 0.26658129692077637 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24660812 flow loss 0.061599154 occ loss 0.18435436 time for this batch 0.2925376892089844 ---------------------------------- train loss for this epoch: 0.22407
time for this epoch 47.55641770362854 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 90 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22472997 flow loss 0.060064077 occ loss 0.16409431 time for this batch 0.25827550888061523 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19661292 flow loss 0.05509768 occ loss 0.14096376 time for this batch 0.28626251220703125 ---------------------------------- train loss for this epoch: 0.225065
time for this epoch 47.52484893798828 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 91 # batch: 125 i_batch: 0.0 the loss for this batch: 0.34697008 flow loss 0.08924756 occ loss 0.2572858 time for this batch 0.24636149406433105 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22945014 flow loss 0.06435562 occ loss 0.16447224 time for this batch 0.29666709899902344 ---------------------------------- train loss for this epoch: 0.225477
time for this epoch 46.87674880027771 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 92 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20048472 flow loss 0.053577602 occ loss 0.14575236 time for this batch 0.2670106887817383 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22200595 flow loss 0.057517603 occ loss 0.16383867 time for this batch 0.28465819358825684 ---------------------------------- train loss for this epoch: 0.224886
time for this epoch 47.28799223899841 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 93 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26989183 flow loss 0.07781762 occ loss 0.19145073 time for this batch 0.2622947692871094 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22913764 flow loss 0.06680572 occ loss 0.16127306 time for this batch 0.2943706512451172 ---------------------------------- train loss for this epoch: 0.224196
time for this epoch 47.05207371711731 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 94 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24007244 flow loss 0.06206041 occ loss 0.17762168 time for this batch 0.25510168075561523 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2184454 flow loss 0.05619896 occ loss 0.16199675 time for this batch 0.2980616092681885 ---------------------------------- train loss for this epoch: 0.224684
time for this epoch 47.52310538291931 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 95 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27375016 flow loss 0.074697755 occ loss 0.19865957 time for this batch 0.2587621212005615 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19834377 flow loss 0.06265927 occ loss 0.13507587 time for this batch 0.29793667793273926 ---------------------------------- train loss for this epoch: 0.223323
time for this epoch 46.85683822631836 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 96 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17677973 flow loss 0.051996917 occ loss 0.124561496 time for this batch 0.257068395614624 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27920687 flow loss 0.07167217 occ loss 0.2068914 time for this batch 0.2996647357940674 ---------------------------------- train loss for this epoch: 0.223077
time for this epoch 46.76869440078735 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 97 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23619846 flow loss 0.06622104 occ loss 0.16960652 time for this batch 0.25528931617736816 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22709352 flow loss 0.06184226 occ loss 0.16483621 time for this batch 0.307098388671875 ---------------------------------- train loss for this epoch: 0.224456
time for this epoch 47.020057678222656 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 98 # batch: 125 i_batch: 0.0 the loss for this batch: 0.15823515 flow loss 0.04512996 occ loss 0.11255169 time for this batch 0.25237226486206055 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22814828 flow loss 0.06662852 occ loss 0.16091661 time for this batch 0.27634358406066895 ---------------------------------- train loss for this epoch: 0.223944
time for this epoch 46.975518465042114 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 99 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21167402 flow loss 0.059908684 occ loss 0.15146658 time for this batch 0.2670915126800537 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2242257 flow loss 0.063878305 occ loss 0.15976354 time for this batch 0.30036401748657227 ---------------------------------- train loss for this epoch: 0.223027
time for this epoch 47.68540287017822 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 100 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24973303 flow loss 0.059866983 occ loss 0.18911374 time for this batch 0.26645421981811523 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.1971277 flow loss 0.06095229 occ loss 0.135418 time for this batch 0.29178929328918457 ---------------------------------- train loss for this epoch: 0.224301
time for this epoch 47.20949149131775 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 101 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23962826 flow loss 0.06150442 occ loss 0.17742729 time for this batch 0.25769662857055664 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15409818 flow loss 0.04741452 occ loss 0.10632665 time for this batch 0.28556203842163086 ---------------------------------- train loss for this epoch: 0.222586
time for this epoch 47.15072011947632 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 102 # batch: 125 i_batch: 0.0 the loss for this batch: 0.15467055 flow loss 0.046791315 occ loss 0.107619084 time for this batch 0.2750985622406006 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18649942 flow loss 0.051912438 occ loss 0.13397431 time for this batch 0.2899799346923828 ---------------------------------- train loss for this epoch: 0.222217
time for this epoch 47.02670431137085 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 103 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23399526 flow loss 0.060445625 occ loss 0.17302832 time for this batch 0.25742340087890625 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25926605 flow loss 0.06807922 occ loss 0.1907323 time for this batch 0.2941014766693115 ---------------------------------- train loss for this epoch: 0.222438
time for this epoch 47.382933616638184 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 104 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22540803 flow loss 0.06217406 occ loss 0.16271056 time for this batch 0.2613379955291748 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18226808 flow loss 0.05415452 occ loss 0.12772079 time for this batch 0.30111217498779297 ---------------------------------- train loss for this epoch: 0.220787
time for this epoch 46.63507866859436 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 105 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18235563 flow loss 0.05128048 occ loss 0.13045818 time for this batch 0.272763729095459 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25873244 flow loss 0.070794046 occ loss 0.18720824 time for this batch 0.29265713691711426 ---------------------------------- train loss for this epoch: 0.220799
time for this epoch 46.53801250457764 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 106 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23151481 flow loss 0.06514883 occ loss 0.16575769 time for this batch 0.2507011890411377 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.28969324 flow loss 0.07104124 occ loss 0.21786486 time for this batch 0.2954566478729248 ---------------------------------- train loss for this epoch: 0.221814
time for this epoch 46.81429696083069 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 107 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27902168 flow loss 0.07170975 occ loss 0.20690091 time for this batch 0.23834609985351562 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21348573 flow loss 0.060039807 occ loss 0.15297793 time for this batch 0.30982398986816406 ---------------------------------- train loss for this epoch: 0.220691
time for this epoch 46.36823391914368 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 108 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18806933 flow loss 0.056069244 occ loss 0.13152705 time for this batch 0.25530481338500977 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19742467 flow loss 0.055862688 occ loss 0.14109188 time for this batch 0.27718091011047363 ---------------------------------- train loss for this epoch: 0.222094
time for this epoch 47.07396411895752 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 109 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19742267 flow loss 0.05692082 occ loss 0.1401186 time for this batch 0.26055407524108887 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.13255581 flow loss 0.040006444 occ loss 0.09161787 time for this batch 0.2845637798309326 ---------------------------------- train loss for this epoch: 0.221785
time for this epoch 46.559916496276855 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 110 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2917515 flow loss 0.07083675 occ loss 0.22052361 time for this batch 0.2646162509918213 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24006897 flow loss 0.063179545 occ loss 0.17622955 time for this batch 0.2922067642211914 ---------------------------------- train loss for this epoch: 0.221
time for this epoch 47.83486890792847 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 111 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21231051 flow loss 0.058298107 occ loss 0.1534221 time for this batch 0.2695333957672119 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17765418 flow loss 0.048610426 occ loss 0.12837256 time for this batch 0.2981088161468506 ---------------------------------- train loss for this epoch: 0.219739
time for this epoch 47.25558567047119 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 112 # batch: 125 i_batch: 0.0 the loss for this batch: 0.29169002 flow loss 0.08190335 occ loss 0.20939149 time for this batch 0.2591419219970703 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23999743 flow loss 0.067132264 occ loss 0.17251933 time for this batch 0.2809288501739502 ---------------------------------- train loss for this epoch: 0.221125
time for this epoch 47.2586829662323 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 113 # batch: 125 i_batch: 0.0 the loss for this batch: 0.27344596 flow loss 0.07202513 occ loss 0.20090409 time for this batch 0.26323461532592773 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22564581 flow loss 0.058969002 occ loss 0.16609828 time for this batch 0.3000969886779785 ---------------------------------- train loss for this epoch: 0.219412
time for this epoch 47.52973031997681 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 114 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20141241 flow loss 0.057376176 occ loss 0.14371713 time for this batch 0.2289109230041504 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16331281 flow loss 0.04827935 occ loss 0.11438498 time for this batch 0.281569242477417 ---------------------------------- train loss for this epoch: 0.219969
time for this epoch 47.42804718017578 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 115 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18093707 flow loss 0.057023883 occ loss 0.12360453 time for this batch 0.2786571979522705 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25064263 flow loss 0.063117474 occ loss 0.18712452 time for this batch 0.30254554748535156 ---------------------------------- train loss for this epoch: 0.218985
time for this epoch 46.257829666137695 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 116 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18182087 flow loss 0.055429623 occ loss 0.12591058 time for this batch 0.25534677505493164 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.29770887 flow loss 0.07634772 occ loss 0.22092253 time for this batch 0.30246591567993164 ---------------------------------- train loss for this epoch: 0.219443
time for this epoch 47.03590130805969 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 117 # batch: 125 i_batch: 0.0 the loss for this batch: 0.137828 flow loss 0.04458708 occ loss 0.09291511 time for this batch 0.2641575336456299 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23416035 flow loss 0.066887625 occ loss 0.16673365 time for this batch 0.27777624130249023 ---------------------------------- train loss for this epoch: 0.219482
time for this epoch 47.74198269844055 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 118 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20627113 flow loss 0.057425663 occ loss 0.1466743 time for this batch 0.29274821281433105 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.17514564 flow loss 0.05415994 occ loss 0.12062561 time for this batch 0.2958528995513916 ---------------------------------- train loss for this epoch: 0.219502
time for this epoch 47.68229532241821 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 119 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19610979 flow loss 0.04946136 occ loss 0.14590828 time for this batch 0.25638461112976074 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18806085 flow loss 0.05620066 occ loss 0.13136922 time for this batch 0.2900676727294922 ---------------------------------- train loss for this epoch: 0.219702
time for this epoch 47.07689905166626 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 120 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22200584 flow loss 0.058634035 occ loss 0.1628272 time for this batch 0.2734682559967041 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21803288 flow loss 0.0631919 occ loss 0.15445563 time for this batch 0.25345849990844727 ---------------------------------- train loss for this epoch: 0.218533
time for this epoch 47.934553146362305 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 121 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2191689 flow loss 0.059665002 occ loss 0.15877832 time for this batch 0.28266263008117676 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19132972 flow loss 0.06044713 occ loss 0.13046 time for this batch 0.311748743057251 ---------------------------------- train loss for this epoch: 0.219383
time for this epoch 47.13943672180176 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 122 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25493687 flow loss 0.06701924 occ loss 0.18761837 time for this batch 0.2505645751953125 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24063185 flow loss 0.06356003 occ loss 0.17645353 time for this batch 0.2849719524383545 ---------------------------------- train loss for this epoch: 0.219705
time for this epoch 47.135721921920776 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 123 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24890995 flow loss 0.06884228 occ loss 0.1794167 time for this batch 0.2568776607513428 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19091994 flow loss 0.057308834 occ loss 0.13336346 time for this batch 0.3012351989746094 ---------------------------------- train loss for this epoch: 0.218296
time for this epoch 46.95341157913208 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 124 # batch: 125 i_batch: 0.0 the loss for this batch: 0.1942326 flow loss 0.056310415 occ loss 0.13746749 time for this batch 0.27208995819091797 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22632639 flow loss 0.08222665 occ loss 0.14301272 time for this batch 0.2944605350494385 ---------------------------------- train loss for this epoch: 0.222706
time for this epoch 48.3430438041687 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 125 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18086606 flow loss 0.049726184 occ loss 0.13041596 time for this batch 0.2680931091308594 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27108976 flow loss 0.06777339 occ loss 0.20284988 time for this batch 0.2963564395904541 ---------------------------------- train loss for this epoch: 0.217301
time for this epoch 47.73315095901489 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 126 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24381572 flow loss 0.06806336 occ loss 0.17519939 time for this batch 0.25179338455200195 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20206064 flow loss 0.051935032 occ loss 0.14967956 time for this batch 0.2917478084564209 ---------------------------------- train loss for this epoch: 0.216874
time for this epoch 47.798412799835205 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 127 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25494295 flow loss 0.06772278 occ loss 0.18687436 time for this batch 0.2470531463623047 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.3097865 flow loss 0.08125269 occ loss 0.22807676 time for this batch 0.27557826042175293 ---------------------------------- train loss for this epoch: 0.217544
time for this epoch 47.4901180267334 No_decrease: 13 ----------------an epoch starts------------------- i_epoch: 128 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21407175 flow loss 0.060442757 occ loss 0.15320736 time for this batch 0.26371216773986816 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22686146 flow loss 0.060660727 occ loss 0.16557744 time for this batch 0.2957170009613037 ---------------------------------- train loss for this epoch: 0.217229
time for this epoch 47.67625665664673 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 129 # batch: 125 i_batch: 0.0 the loss for this batch: 0.28906953 flow loss 0.07641371 occ loss 0.21203063 time for this batch 0.27312564849853516 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21574385 flow loss 0.060117442 occ loss 0.15532373 time for this batch 0.28853869438171387 ---------------------------------- train loss for this epoch: 0.217226
time for this epoch 46.783326148986816 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 130 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23663773 flow loss 0.062352233 occ loss 0.17367063 time for this batch 0.26282596588134766 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24551424 flow loss 0.068652056 occ loss 0.17652288 time for this batch 0.2981224060058594 ---------------------------------- train loss for this epoch: 0.217464
time for this epoch 47.159279346466064 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 131 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24094765 flow loss 0.06359128 occ loss 0.17682713 time for this batch 0.23296904563903809 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23442681 flow loss 0.065042235 occ loss 0.16868311 time for this batch 0.2895796298980713 ---------------------------------- train loss for this epoch: 0.217065
time for this epoch 47.42090559005737 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 132 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24343115 flow loss 0.06049733 occ loss 0.18245104 time for this batch 0.2698979377746582 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2860859 flow loss 0.07262067 occ loss 0.21295094 time for this batch 0.30417776107788086 ---------------------------------- train loss for this epoch: 0.217734
time for this epoch 46.61857461929321 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 133 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19150177 flow loss 0.055533618 occ loss 0.1354183 time for this batch 0.2515866756439209 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21237203 flow loss 0.057910956 occ loss 0.15379405 time for this batch 0.2862966060638428 ---------------------------------- train loss for this epoch: 0.218796
time for this epoch 47.555232524871826 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 134 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2832797 flow loss 0.06971473 occ loss 0.21291472 time for this batch 0.22903823852539062 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21771365 flow loss 0.055280354 occ loss 0.16176465 time for this batch 0.30055689811706543 ---------------------------------- train loss for this epoch: 0.21592
time for this epoch 47.08804678916931 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 135 # batch: 125 i_batch: 0.0 the loss for this batch: 0.1714489 flow loss 0.047168158 occ loss 0.12388081 time for this batch 0.2606794834136963 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19015616 flow loss 0.055162027 occ loss 0.13471241 time for this batch 0.2975146770477295 ---------------------------------- train loss for this epoch: 0.218441
time for this epoch 48.16963982582092 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 136 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23863824 flow loss 0.06336951 occ loss 0.174927 time for this batch 0.26873111724853516 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18926926 flow loss 0.057131644 occ loss 0.13148463 time for this batch 0.2895185947418213 ---------------------------------- train loss for this epoch: 0.216741
time for this epoch 47.881932973861694 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 137 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18138586 flow loss 0.05174737 occ loss 0.12935154 time for this batch 0.2786080837249756 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2865008 flow loss 0.069707714 occ loss 0.21624604 time for this batch 0.2628183364868164 ---------------------------------- train loss for this epoch: 0.215972
time for this epoch 46.26638650894165 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 138 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2184561 flow loss 0.059811138 occ loss 0.15834703 time for this batch 0.2605397701263428 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19247743 flow loss 0.054656785 occ loss 0.13748923 time for this batch 0.2760031223297119 ---------------------------------- train loss for this epoch: 0.21573
time for this epoch 47.09421467781067 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 139 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2587137 flow loss 0.0709266 occ loss 0.1871278 time for this batch 0.23897886276245117 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21398732 flow loss 0.062047906 occ loss 0.15109289 time for this batch 0.30007171630859375 ---------------------------------- train loss for this epoch: 0.21603
time for this epoch 47.498694896698 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 140 # batch: 125 i_batch: 0.0 the loss for this batch: 0.13423513 flow loss 0.040231153 occ loss 0.0936144 time for this batch 0.27071261405944824 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21943593 flow loss 0.058191225 occ loss 0.1608055 time for this batch 0.2946968078613281 ---------------------------------- train loss for this epoch: 0.216014
time for this epoch 47.295650243759155 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 141 # batch: 125 i_batch: 0.0 the loss for this batch: 0.15996872 flow loss 0.047252495 occ loss 0.11235078 time for this batch 0.283494234085083 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21580355 flow loss 0.058304362 occ loss 0.15717565 time for this batch 0.29706478118896484 ---------------------------------- train loss for this epoch: 0.216289
time for this epoch 46.985618591308594 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 142 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26205006 flow loss 0.066241875 occ loss 0.1954116 time for this batch 0.26685261726379395 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18488818 flow loss 0.051683094 occ loss 0.13285 time for this batch 0.29510498046875 ---------------------------------- train loss for this epoch: 0.21517
time for this epoch 47.459959745407104 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 143 # batch: 125 i_batch: 0.0 the loss for this batch: 0.15351094 flow loss 0.04612854 occ loss 0.107108034 time for this batch 0.2781643867492676 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.27597365 flow loss 0.07131522 occ loss 0.20379542 time for this batch 0.30126166343688965 ---------------------------------- train loss for this epoch: 0.215707
time for this epoch 46.75617218017578 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 144 # batch: 125 i_batch: 0.0 the loss for this batch: 0.16967164 flow loss 0.04438633 occ loss 0.12429821 time for this batch 0.271148681640625 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21554504 flow loss 0.061100177 occ loss 0.15413792 time for this batch 0.2776041030883789 ---------------------------------- train loss for this epoch: 0.21681
time for this epoch 47.2680549621582 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 145 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22468543 flow loss 0.062223434 occ loss 0.16209649 time for this batch 0.18397212028503418 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19971019 flow loss 0.05595283 occ loss 0.14334254 time for this batch 0.30547046661376953 ---------------------------------- train loss for this epoch: 0.215279
time for this epoch 47.45968055725098 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 146 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2626447 flow loss 0.06555498 occ loss 0.19653267 time for this batch 0.27297425270080566 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24557774 flow loss 0.06548678 occ loss 0.17961445 time for this batch 0.2568628787994385 ---------------------------------- train loss for this epoch: 0.216027
time for this epoch 43.04589533805847 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 147 # batch: 125 i_batch: 0.0 the loss for this batch: 0.31037328 flow loss 0.072889775 occ loss 0.23669554 time for this batch 0.2297370433807373 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21295089 flow loss 0.0629737 occ loss 0.14955696 time for this batch 0.2725965976715088 ---------------------------------- train loss for this epoch: 0.215039
time for this epoch 42.96318769454956 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 148 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18947263 flow loss 0.05348306 occ loss 0.13569732 time for this batch 0.20957732200622559 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20614737 flow loss 0.054906465 occ loss 0.15087566 time for this batch 0.2771327495574951 ---------------------------------- train loss for this epoch: 0.215551
time for this epoch 41.0017511844635 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 149 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21346624 flow loss 0.058393594 occ loss 0.1547935 time for this batch 0.23520708084106445 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19492403 flow loss 0.055638794 occ loss 0.13865758 time for this batch 0.26777148246765137 ---------------------------------- train loss for this epoch: 0.21494
time for this epoch 40.32922840118408 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 150 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2650012 flow loss 0.06592083 occ loss 0.19845401 time for this batch 0.20517492294311523 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18652774 flow loss 0.052242838 occ loss 0.13392787 time for this batch 0.2598299980163574 ---------------------------------- train loss for this epoch: 0.20845
time for this epoch 40.6515531539917 No_decrease: 0 ----------------an epoch starts------------------- i_epoch: 151 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21832016 flow loss 0.05495089 occ loss 0.16296105 time for this batch 0.19799113273620605 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2223007 flow loss 0.057496794 occ loss 0.1643562 time for this batch 0.24366450309753418 ---------------------------------- train loss for this epoch: 0.207457
time for this epoch 40.1722366809845 No_decrease: 1 ----------------an epoch starts------------------- i_epoch: 152 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2584956 flow loss 0.06951432 occ loss 0.18856917 time for this batch 0.19930386543273926 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2182394 flow loss 0.056593727 occ loss 0.16115071 time for this batch 0.26369595527648926 ---------------------------------- train loss for this epoch: 0.206302
time for this epoch 41.328067779541016 No_decrease: 2 ----------------an epoch starts------------------- i_epoch: 153 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20626646 flow loss 0.06032289 occ loss 0.14561778 time for this batch 0.2114100456237793 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20724377 flow loss 0.055035625 occ loss 0.15191664 time for this batch 0.2182612419128418 ---------------------------------- train loss for this epoch: 0.206471
time for this epoch 36.533257722854614 No_decrease: 3 ----------------an epoch starts------------------- i_epoch: 154 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23340899 flow loss 0.06528624 occ loss 0.16760686 time for this batch 0.20197224617004395 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.21805769 flow loss 0.05807702 occ loss 0.1596145 time for this batch 0.2395472526550293 ---------------------------------- train loss for this epoch: 0.206819
time for this epoch 38.30153965950012 No_decrease: 4 ----------------an epoch starts------------------- i_epoch: 155 # batch: 125 i_batch: 0.0 the loss for this batch: 0.26771277 flow loss 0.06677699 occ loss 0.20052223 time for this batch 0.20337986946105957 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.24591903 flow loss 0.06180777 occ loss 0.18374394 time for this batch 0.26564908027648926 ---------------------------------- train loss for this epoch: 0.206545
time for this epoch 40.455445528030396 No_decrease: 5 ----------------an epoch starts------------------- i_epoch: 156 # batch: 125 i_batch: 0.0 the loss for this batch: 0.25931358 flow loss 0.063069426 occ loss 0.19582438 time for this batch 0.2645440101623535 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.2185557 flow loss 0.060834862 occ loss 0.15721765 time for this batch 0.260089635848999 ---------------------------------- train loss for this epoch: 0.206513
time for this epoch 41.4960401058197 No_decrease: 6 ----------------an epoch starts------------------- i_epoch: 157 # batch: 125 i_batch: 0.0 the loss for this batch: 0.23159291 flow loss 0.062200174 occ loss 0.16903917 time for this batch 0.21068143844604492 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15574284 flow loss 0.04433285 occ loss 0.11110937 time for this batch 0.2600581645965576 ---------------------------------- train loss for this epoch: 0.206523
time for this epoch 41.37224745750427 No_decrease: 7 ----------------an epoch starts------------------- i_epoch: 158 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18698826 flow loss 0.051358406 occ loss 0.13536468 time for this batch 0.23157620429992676 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.26634187 flow loss 0.07185249 occ loss 0.19406308 time for this batch 0.23673105239868164 ---------------------------------- train loss for this epoch: 0.206613
time for this epoch 39.641467571258545 No_decrease: 8 ----------------an epoch starts------------------- i_epoch: 159 # batch: 125 i_batch: 0.0 the loss for this batch: 0.12973377 flow loss 0.041348904 occ loss 0.088110164 time for this batch 0.21560239791870117 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19860072 flow loss 0.055671062 occ loss 0.14268817 time for this batch 0.26464343070983887 ---------------------------------- train loss for this epoch: 0.206445
time for this epoch 41.35438585281372 No_decrease: 9 ----------------an epoch starts------------------- i_epoch: 160 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18185477 flow loss 0.05018213 occ loss 0.13144426 time for this batch 0.15895605087280273 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16294473 flow loss 0.044129964 occ loss 0.11846012 time for this batch 0.20165467262268066 ---------------------------------- train loss for this epoch: 0.206113
time for this epoch 41.988266944885254 No_decrease: 10 ----------------an epoch starts------------------- i_epoch: 161 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17527081 flow loss 0.048129216 occ loss 0.12680309 time for this batch 0.23383784294128418 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22511855 flow loss 0.061881833 occ loss 0.16261823 time for this batch 0.2552330493927002 ---------------------------------- train loss for this epoch: 0.206399
time for this epoch 40.80246019363403 No_decrease: 11 ----------------an epoch starts------------------- i_epoch: 162 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2203226 flow loss 0.05723025 occ loss 0.16282006 time for this batch 0.15674376487731934 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.15136266 flow loss 0.046818107 occ loss 0.10431208 time for this batch 0.2350156307220459 ---------------------------------- train loss for this epoch: 0.206084
time for this epoch 39.95799803733826 No_decrease: 12 ----------------an epoch starts------------------- i_epoch: 163 # batch: 125 i_batch: 0.0 the loss for this batch: 0.16990283 flow loss 0.04811989 occ loss 0.12147142 time for this batch 0.22716331481933594 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20145062 flow loss 0.056037873 occ loss 0.1451184 time for this batch 0.23952221870422363 ---------------------------------- train loss for this epoch: 0.206061
time for this epoch 41.36119079589844 No_decrease: 13 ----------------an epoch starts------------------- i_epoch: 164 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17456132 flow loss 0.056322545 occ loss 0.1178682 time for this batch 0.1961066722869873 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.26600003 flow loss 0.0706007 occ loss 0.1950615 time for this batch 0.2719240188598633 ---------------------------------- train loss for this epoch: 0.205913
time for this epoch 40.808897495269775 No_decrease: 14 ----------------an epoch starts------------------- i_epoch: 165 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21899644 flow loss 0.058634043 occ loss 0.1600853 time for this batch 0.23692941665649414 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16579236 flow loss 0.045831002 occ loss 0.11961085 time for this batch 0.27388834953308105 ---------------------------------- train loss for this epoch: 0.206217
time for this epoch 41.22698378562927 No_decrease: 15 ----------------an epoch starts------------------- i_epoch: 166 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20885094 flow loss 0.056882013 occ loss 0.15152061 time for this batch 0.21873760223388672 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23038949 flow loss 0.057549156 occ loss 0.17226219 time for this batch 0.2722768783569336 ---------------------------------- train loss for this epoch: 0.205718
time for this epoch 41.53787112236023 No_decrease: 16 ----------------an epoch starts------------------- i_epoch: 167 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17017916 flow loss 0.04673846 occ loss 0.12309806 time for this batch 0.21821379661560059 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23251389 flow loss 0.05742092 occ loss 0.17469952 time for this batch 0.2789011001586914 ---------------------------------- train loss for this epoch: 0.205897
time for this epoch 41.089350938797 No_decrease: 17 ----------------an epoch starts------------------- i_epoch: 168 # batch: 125 i_batch: 0.0 the loss for this batch: 0.1731047 flow loss 0.04883546 occ loss 0.12393721 time for this batch 0.19205737113952637 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22066103 flow loss 0.0582116 occ loss 0.16188936 time for this batch 0.2479252815246582 ---------------------------------- train loss for this epoch: 0.205674
time for this epoch 41.29987168312073 No_decrease: 18 ----------------an epoch starts------------------- i_epoch: 169 # batch: 125 i_batch: 0.0 the loss for this batch: 0.14919363 flow loss 0.042612646 occ loss 0.10623901 time for this batch 0.2384960651397705 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25613397 flow loss 0.06316508 occ loss 0.19237769 time for this batch 0.22958111763000488 ---------------------------------- train loss for this epoch: 0.205696
time for this epoch 40.368603229522705 No_decrease: 19 ----------------an epoch starts------------------- i_epoch: 170 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18986031 flow loss 0.05398669 occ loss 0.13559859 time for this batch 0.22502970695495605 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.12513039 flow loss 0.038568143 occ loss 0.08633053 time for this batch 0.23203468322753906 ---------------------------------- train loss for this epoch: 0.205677
time for this epoch 39.80390691757202 No_decrease: 20 ----------------an epoch starts------------------- i_epoch: 171 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17501144 flow loss 0.047059365 occ loss 0.12763153 time for this batch 0.19309353828430176 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.25352705 flow loss 0.06460174 occ loss 0.1883572 time for this batch 0.27330636978149414 ---------------------------------- train loss for this epoch: 0.205906
time for this epoch 41.107749700546265 No_decrease: 21 ----------------an epoch starts------------------- i_epoch: 172 # batch: 125 i_batch: 0.0 the loss for this batch: 0.24074893 flow loss 0.068971716 occ loss 0.17125596 time for this batch 0.2523798942565918 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.20906834 flow loss 0.059029896 occ loss 0.14968686 time for this batch 0.2816154956817627 ---------------------------------- train loss for this epoch: 0.205312
time for this epoch 41.683329820632935 No_decrease: 22 ----------------an epoch starts------------------- i_epoch: 173 # batch: 125 i_batch: 0.0 the loss for this batch: 0.17736349 flow loss 0.05118203 occ loss 0.1259411 time for this batch 0.23881030082702637 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19436261 flow loss 0.053372502 occ loss 0.14072385 time for this batch 0.2540004253387451 ---------------------------------- train loss for this epoch: 0.205558
time for this epoch 40.00807571411133 No_decrease: 23 ----------------an epoch starts------------------- i_epoch: 174 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2129353 flow loss 0.055335198 occ loss 0.15706316 time for this batch 0.2583446502685547 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16701527 flow loss 0.04971849 occ loss 0.116997965 time for this batch 0.25382256507873535 ---------------------------------- train loss for this epoch: 0.205757
time for this epoch 41.2247896194458 No_decrease: 24 ----------------an epoch starts------------------- i_epoch: 175 # batch: 125 i_batch: 0.0 the loss for this batch: 0.18363215 flow loss 0.0556709 occ loss 0.12765455 time for this batch 0.2131640911102295 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19202879 flow loss 0.057532594 occ loss 0.13420253 time for this batch 0.2504408359527588 ---------------------------------- train loss for this epoch: 0.205499
time for this epoch 39.81772208213806 No_decrease: 25 ----------------an epoch starts------------------- i_epoch: 176 # batch: 125 i_batch: 0.0 the loss for this batch: 0.19227134 flow loss 0.051590644 occ loss 0.1401833 time for this batch 0.2418968677520752 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.16841443 flow loss 0.048155714 occ loss 0.11987661 time for this batch 0.2654738426208496 ---------------------------------- train loss for this epoch: 0.205523
time for this epoch 40.44121217727661 No_decrease: 26 ----------------an epoch starts------------------- i_epoch: 177 # batch: 125 i_batch: 0.0 the loss for this batch: 0.20141819 flow loss 0.05632822 occ loss 0.14465709 time for this batch 0.19767141342163086 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.18843658 flow loss 0.054312862 occ loss 0.13392574 time for this batch 0.2526674270629883 ---------------------------------- train loss for this epoch: 0.205271
time for this epoch 41.15841507911682 No_decrease: 27 ----------------an epoch starts------------------- i_epoch: 178 # batch: 125 i_batch: 0.0 the loss for this batch: 0.2154789 flow loss 0.059528172 occ loss 0.15569629 time for this batch 0.2210979461669922 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.22521336 flow loss 0.061221283 occ loss 0.16358265 time for this batch 0.26985740661621094 ---------------------------------- train loss for this epoch: 0.20514
time for this epoch 41.647724866867065 No_decrease: 28 ----------------an epoch starts------------------- i_epoch: 179 # batch: 125 i_batch: 0.0 the loss for this batch: 0.21368484 flow loss 0.055513304 occ loss 0.15779634 time for this batch 0.20692896842956543 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.23669542 flow loss 0.06649097 occ loss 0.16990575 time for this batch 0.25688934326171875 ---------------------------------- train loss for this epoch: 0.205392
time for this epoch 40.62503910064697 No_decrease: 29 ----------------an epoch starts------------------- i_epoch: 180 # batch: 125 i_batch: 0.0 the loss for this batch: 0.22305347 flow loss 0.05948866 occ loss 0.16327912 time for this batch 0.24269723892211914 ---------------------------------- i_batch: 64.0 the loss for this batch: 0.19205001 flow loss 0.054287605 occ loss 0.13735195 time for this batch 0.2603893280029297 ---------------------------------- train loss for this epoch: 0.205328
time for this epoch 41.539491415023804 Early stop at the 181-th epoch
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one data split and report the four metrics.

    Parameters
    ----------
    model : the trained network passed straight through to ``vali_test``.
    vt : dict-like split (validation or test) holding "flow", "flow_mask",
        "occupancy" and "occupancy_mask" entries; the two masks are tensors
        (they are moved to ``device`` here) — the flow/occupancy values are
        forwarded as-is. NOTE(review): assumes ``vali_test`` handles device
        placement for the unmasked tensors — confirm.
    f_o_mean_std : normalization statistics forwarded to ``vali_test``.

    Returns
    -------
    tuple of (flow MAE, flow RMSE, occupancy MAE, occupancy RMSE), as
    produced by ``vali_test`` with batch size ``hyper["b_s_vt"]``.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    # Print each metric on its own line, in the same order as returned.
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print (label, value)
    f_mae, f_rmse, o_mae, o_rmse = metrics
    return f_mae, f_rmse, o_mae, o_rmse
# Evaluate the trained model on the validation split and keep the four
# metrics (flow MAE/RMSE, occupancy MAE/RMSE) for later comparison.
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
apply_to_vali_test(trained_model, vali, f_o_mean_std)
flow_mae 35.79439089914421 flow_rmse 55.05386218850903 occ_mae 0.0392215118683922 occ_rmse 0.08028854952676243
# Evaluate the same trained model on the held-out test split; mirrors the
# validation call above so the two splits are scored identically.
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
apply_to_vali_test(trained_model, test, f_o_mean_std)
flow_mae 33.85003701170569 flow_rmse 52.30893986700123 occ_mae 0.032333377308166586 occ_rmse 0.06913925713477638